import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime, date, time
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import cv2
import math
import scipy.misc
import hashlib
from os import listdir
from os.path import isfile, join
import os
import random
# notebook in src/training => set the project folder as root
# /!\ execute once
# NOTE(review): the chdir is relative — running this cell twice walks two
# levels above the project root; restarting the kernel is the safe reset.
os.chdir("../../")
# project-local modules, resolved from the project root set just above
from src.training import mpii
from src.utils.drawer import Drawer
# drawer renders bounding boxes and 2D skeletons onto images (see drawImage below)
drawer = Drawer()
Draw the bounding box and the 2D skeleton of every person annotated on a given (mpiiDataset, entryId) pair.
def drawImage(mpiiDataset, entryId, thickness=8):
    """Draw the bounding box and 2D skeleton of every person annotated
    on entry `entryId` of `mpiiDataset`, then display the result.

    Args:
        mpiiDataset: dataset object exposing getImage / getTotalPeopleOn /
            getBoundingBox / getJoints (see src.training.mpii).
        entryId: index of the image entry to draw.
        thickness: line thickness for the skeleton edges.
    """
    img = mpiiDataset.getImage(entryId)
    for personId in range(mpiiDataset.getTotalPeopleOn(entryId)):
        # getBoundingBox returns (minX, maxX, minY, maxY) — indexed 0..3 here
        minX, maxX, minY, maxY = mpiiDataset.getBoundingBox(entryId, personId)
        bbox = {'minX': minX, 'maxX': maxX, 'minY': minY, 'maxY': maxY}
        # joints come as dicts with 'x'/'y' coords; the drawer expects an
        # integer numpy array of shape (nJoints, 2)
        convertedJoints = np.array(
            [[int(joint['x']), int(joint['y'])]
             for joint in mpiiDataset.getJoints(entryId, personId)])
        img = drawer.drawBBox(img, bbox)
        img = drawer.draw_2d_skeleton(img, convertedJoints, thickness)
    plt.imshow(img)
    plt.show()
# annotated images
imgFolder = "dataset/mpii/data/images"
# previous and next frames (not annotated)
videoFolder = "dataset/mpii/data/videos"
# image annotations (official MPII .mat release)
imgAnnot = 'dataset/mpii/data/metadata/mpii_human_pose_v1_u12_1.mat'
# video annotations (link annotated_img => previous_and_next_frames)
videoAnnot = 'dataset/mpii/data/metadata/mpii_human_pose_v1_sequences_keyframes.mat'
# build the MPII reader from the data folders and the two .mat files
dataset = mpii.Reader.build(imgFolder,videoFolder, imgAnnot, videoAnnot)
# total number of image entries
dataset.size()
# sanity check: render one entry with its bboxes/skeletons, then its metadata
drawImage(dataset, 10)
dataset.getMetadata(10)
In mpii.py, a MetaMapper can either transform an entry's metadata or filter the entry out (by returning None); an ImageMapper applies the matching transformation to the image pixels. A new dataset can be created easily with a mapper.
# a metaMapper receives an entry's metadata and returns either the metadata
# (entry kept) or None (entry filtered out).
# Here: keep only images where some people are not annotated.
def keepPartiallyAnnotated(meta):
    return meta if meta["initTotalUnannotated"] > 0 else None

somePersonNotAnnotateddataset = dataset.mapMetadata(keepPartiallyAnnotated)
print(somePersonNotAnnotateddataset.size())
# preview at most the first 12 kept entries
for entryId in range(somePersonNotAnnotateddataset.size()):
    drawImage(somePersonNotAnnotateddataset, entryId)
    if entryId > 10:
        break
# inverse of the demo above: keep only fully-annotated images
def keepFullyAnnotated(meta):
    return meta if meta["initTotalUnannotated"] == 0 else None

dataset = dataset.mapMetadata(keepFullyAnnotated)
dataset.size()
def getFilterFor(category, activity):
    """Build a metaMapper keeping only metadata matching both substrings.

    Args:
        category: substring that must appear in meta["category"].
        activity: substring that must appear in meta["activity"].

    Returns:
        A function meta -> meta (entry kept) or None (entry filtered out).
        Entries with a missing (None) category or activity are filtered out.
    """
    def metaMapper(meta):
        # `is not None` guards short-circuit before the `in` tests,
        # so an unannotated field never raises
        keep = (meta["category"] is not None
                and category in meta["category"]
                and meta["activity"] is not None
                and activity in meta["activity"])
        return meta if keep else None
    return metaMapper
# preview a few category/activity combinations; at most the first 7
# matches of each are drawn
for categoryKey, activityKey in [("music", "orchestra"),
                                 ("", "bicycling, racing"),
                                 ("water", "canoe"),
                                 ("water", "swim")]:
    tmpDataset = dataset.mapMetadata(getFilterFor(categoryKey, activityKey))
    print(tmpDataset.size())
    for entryId in range(tmpDataset.size()):
        drawImage(tmpDataset, entryId)
        if entryId > 5:
            break
# filter the examples above :
def filterCategory(activity):
    """Build a metaMapper dropping metadata whose activity contains the
    given substring (entries with no activity at all are dropped too)."""
    def metaMapper(meta):
        # keep only annotated activities that do NOT mention `activity`
        if meta["activity"] is not None and activity not in meta["activity"]:
            return meta
        return None
    return metaMapper
# strip the problematic activities demonstrated above
dataset = dataset.mapMetadata(filterCategory("orchestra"))
dataset = dataset.mapMetadata(filterCategory("bicycling, racing"))
dataset = dataset.mapMetadata(filterCategory("canoe"))
dataset = dataset.mapMetadata(filterCategory("swim"))
dataset.size()
# drop entries having at least one joint outside the image bounds
dataset = dataset.mapMetadata(mpii.MetaMapper.filterAllJointsInsideScene())
dataset.size()
The woman isn't labeled anymore because of a missing ankle joint.
drawImage(dataset, 6)
Filter out images containing newly unannotated people.
# NOTE(review): despite its name, this mapper appears to remove entries that
# contain unannotated people (used here right after the joint filter created
# new ones) — confirm against mpii.py
dataset = dataset.mapMetadata(mpii.MetaMapper.containsUnannotatedPeople())
drawImage(dataset, 6)
dataset.size()
Every person in the image must have a bounding-box area >= the pixel count given as argument.
# bounding boxes of the first two people on entry 0
print(dataset.getBoundingBox(0,0))
print(dataset.getBoundingBox(0,1))
drawImage(dataset, 0)
# keep only entries where every person's bbox area is >= 400*150 pixels
tmpDataset = dataset.mapMetadata(mpii.MetaMapper.filterSmallPeople(400*150))
drawImage(tmpDataset, 0)
Some images still have unannotated people. A pre-trained object detector could be used to detect such cases. This filter removes all the images whose name is contained in the argument (an iterable of image names).
# collect the names of the first 5 entries while previewing the first 8
# NOTE(review): the loop indentation was lost in the notebook export —
# presumably print / if / append / drawImage all sit inside the for body,
# with only the append guarded by i<5; confirm against the original notebook
imgNames = []
for i in range(8):
print(i)
if(i<5):
imgNames.append(dataset.getImageName(i))
drawImage(dataset, i)
# drop every image whose name was collected above
tmpDataset = dataset.mapMetadata(mpii.MetaMapper.filterImageNameIn(imgNames))
print("======================= FIlTERED ===============================")
for i in range(3):
drawImage(tmpDataset, i)
Filter metadata when people overlap: an entry is filtered based on whether float(intersectionArea)/min(person1_area, person2_area) exceeds the given rate.
# from the tmp metadata above
# allow at most 20% of shared bbox area between two people
overlapThreshold = 0.2
# before filtering
for entryId in range(3):
    drawImage(tmpDataset, entryId)
tmpDataset2 = tmpDataset.mapMetadata(mpii.MetaMapper.filterOverlappedPeople(overlapThreshold))
print("======================= FIlTERED ===============================")
# after filtering
for entryId in range(3):
    drawImage(tmpDataset2, entryId)
# demo on imgId = 25 (entry reused by the crop demos below)
drawImage(dataset,25)
The dataset can be cropped to a 480/640 height/width ratio at the center.
# crop mode — presumably 1 = center (0 = left, 2 = right below); confirm in mpii.py
cropCenter = 1
# apply the same crop to metadata (joints, bboxes) and pixels so they stay in sync
datasetResized_center = dataset.mapMetadata(mpii.MetaMapper.cropImageToRatio(480/640.0, cropCenter))
datasetResized_center = datasetResized_center.mapImage(mpii.ImageMapper.cropImageToRatio(480.0/640.0, cropCenter))
drawImage(datasetResized_center,25)
It can also be cropped to 480/640 on the left or on the right.
# can also crop left/right
CROP_LEFT, CROP_RIGHT = 0, 2

def cropSideways(side):
    # apply the same crop to metadata and pixels so they stay in sync
    cropped = dataset.mapMetadata(mpii.MetaMapper.cropImageToRatio(480 / 640.0, side))
    return cropped.mapImage(mpii.ImageMapper.cropImageToRatio(480.0 / 640.0, side))

datasetResized_left = cropSideways(CROP_LEFT)
datasetResized_right = cropSideways(CROP_RIGHT)
drawImage(datasetResized_left, 25)
drawImage(datasetResized_right, 25)
# demo on imgId = 10 : initial img
drawImage(dataset,10)
# same entry cropped at the center to get the target height/width ratio
drawImage(datasetResized_center,10)
Apply the resize to get 640x480 resolution
# resize both metadata (joint coords, bboxes) and pixels to 640x480
datasetResized_center = datasetResized_center.mapMetadata(mpii.MetaMapper.resizeTo(640, 480))
datasetResized_center = datasetResized_center.mapImage(mpii.ImageMapper.resizeTo(640, 480))
The image now has a 640x480 resolution without distortion.
drawImage(datasetResized_center,10)
After resizing, joint positions stay consistent; however, they can end up outside the image.
# entry 26 shows joints landing outside the image after the resize
drawImage(datasetResized_center,26)
# drop entries that have at least one joint outside the image bounds
cleanedDataset = datasetResized_center.mapMetadata(mpii.MetaMapper.filterAllJointsInsideScene())
# note: one image was filtered — what was imgId=26 before is imgId=25 here
drawImage(cleanedDataset,25)
# as before, filter all images that contain unannotated people (like the one above)
print(cleanedDataset.size())
cleanedDataset = cleanedDataset.mapMetadata(mpii.MetaMapper.containsUnannotatedPeople())
print(cleanedDataset.size())
drawImage(cleanedDataset,25)
drawImage(cleanedDataset,21)
# vertical symmetry augmentation: flip the pixels, then the matching metadata
symDataset = cleanedDataset.mapImage(mpii.ImageMapper.verticalFlip())
symDataset = symDataset.mapMetadata(mpii.MetaMapper.verticalFlip())
# TODO: make imageShape constant — do a recursive call to the child
drawImage(symDataset, 21)
# random brightness/saturation jitter — in my opinion valueCoeff=0.3,
# saturationCoeff=0.3 is enough for a slight modification
symDatasetVar = symDataset.mapImage(mpii.ImageMapper.randomValueSaturation(valueCoeff=0.8, saturationCoeff=0.8))
cleanedDatasetVar = cleanedDataset.mapImage(mpii.ImageMapper.randomValueSaturation(valueCoeff=0.8, saturationCoeff=0.8))
# draw the same entry repeatedly to visualise the randomness of the jitter
for _ in range(10):
    drawImage(symDatasetVar, 48)
    drawImage(cleanedDatasetVar, 48)
# symDatasetVar
# cleanedDatasetVar
# five random rotations; each angle is applied to both pixels and metadata
# so the joints stay aligned with the rotated image
for _ in range(5):
    degreeRot = random.random() * 360
    # rotate the flipped dataset
    symDatasetVarRotated = symDatasetVar.mapImage(mpii.ImageMapper.rotate(degreeRot))
    symDatasetVarRotated = symDatasetVarRotated.mapMetadata(mpii.MetaMapper.rotate(degreeRot))
    # rotate the non-flipped dataset with the same angle
    cleanedDatasetVarRotated = cleanedDatasetVar.mapImage(mpii.ImageMapper.rotate(degreeRot))
    cleanedDatasetVarRotated = cleanedDatasetVarRotated.mapMetadata(mpii.MetaMapper.rotate(degreeRot))
    drawImage(symDatasetVarRotated, 48)
    drawImage(cleanedDatasetVarRotated, 48)
In some cases we do not want the zero padding added by a random rotation (the network could, for example, use the zero padding as an indicator for detecting unusual rotated poses).
# transposing avoids the zero padding a rotation would introduce
transposeDataset = symDataset.mapImage(mpii.ImageMapper.transpose())
transposeDataset = transposeDataset.mapMetadata(mpii.MetaMapper.transpose())
# compare original vs transposed
drawImage(symDataset, 10)
drawImage(transposeDataset,10)